#[derive(RustcDecodable)]
struct Options {
flag_no_run: bool,
- flag_package: Option<String>,
+ flag_package: Vec<String>,
flag_jobs: Option<u32>,
flag_features: Vec<String>,
flag_no_default_features: bool,
Execute all benchmarks of a local package
Usage:
- cargo bench [options] [--] [<args>...]
+ cargo bench [options] [-p SPEC --package SPEC]... [--] [<args>...]
Options:
-h, --help Print this message
target: options.flag_target.as_ref().map(|s| &s[..]),
features: &options.flag_features,
no_default_features: options.flag_no_default_features,
- spec: options.flag_package.as_ref().map(|s| &s[..]),
+ spec: &options.flag_package,
exec_engine: None,
release: true,
mode: ops::CompileMode::Bench,
#[derive(RustcDecodable)]
struct Options {
- flag_package: Option<String>,
+ flag_package: Vec<String>,
flag_jobs: Option<u32>,
flag_features: Vec<String>,
flag_no_default_features: bool,
Compile a local package and all of its dependencies
Usage:
- cargo build [options]
+ cargo build [options] [-p SPEC --package SPEC]...
Options:
-h, --help Print this message
target: options.flag_target.as_ref().map(|t| &t[..]),
features: &options.flag_features,
no_default_features: options.flag_no_default_features,
- spec: options.flag_package.as_ref().map(|s| &s[..]),
+ spec: &options.flag_package,
exec_engine: None,
mode: ops::CompileMode::Build,
release: options.flag_release,
flag_release: bool,
flag_quiet: bool,
flag_color: Option<String>,
- flag_package: Option<String>,
+ flag_package: Vec<String>,
}
pub const USAGE: &'static str = "
Build a package's documentation
Usage:
- cargo doc [options]
+ cargo doc [options] [-p SPEC --package SPEC]...
Options:
-h, --help Print this message
target: options.flag_target.as_ref().map(|t| &t[..]),
features: &options.flag_features,
no_default_features: options.flag_no_default_features,
- spec: options.flag_package.as_ref().map(|s| &s[..]),
+ spec: &options.flag_package,
exec_engine: None,
filter: ops::CompileFilter::Everything,
release: options.flag_release,
target: options.flag_target.as_ref().map(|t| &t[..]),
features: &options.flag_features,
no_default_features: options.flag_no_default_features,
- spec: None,
+ spec: &[],
exec_engine: None,
release: options.flag_release,
mode: ops::CompileMode::Build,
#[derive(RustcDecodable)]
struct Options {
arg_opts: Option<Vec<String>>,
- flag_package: Option<String>,
+ flag_package: Vec<String>,
flag_jobs: Option<u32>,
flag_features: Vec<String>,
flag_no_default_features: bool,
Compile a package and all of its dependencies
Usage:
- cargo rustc [options] [--] [<opts>...]
+ cargo rustc [options] [-p SPEC --package SPEC]... [--] [<opts>...]
Options:
-h, --help Print this message
target: options.flag_target.as_ref().map(|t| &t[..]),
features: &options.flag_features,
no_default_features: options.flag_no_default_features,
- spec: options.flag_package.as_ref().map(|s| &s[..]),
+ spec: &options.flag_package,
exec_engine: None,
mode: ops::CompileMode::Build,
release: options.flag_release,
flag_manifest_path: Option<String>,
flag_no_default_features: bool,
flag_no_run: bool,
- flag_package: Option<String>,
+ flag_package: Vec<String>,
flag_target: Option<String>,
flag_lib: bool,
flag_bin: Vec<String>,
Execute all unit and integration tests of a local package
Usage:
- cargo test [options] [--] [<args>...]
+ cargo test [options] [-p SPEC --package SPEC]... [--] [<args>...]
Options:
-h, --help Print this message
target: options.flag_target.as_ref().map(|s| &s[..]),
features: &options.flag_features,
no_default_features: options.flag_no_default_features,
- spec: options.flag_package.as_ref().map(|s| &s[..]),
+ spec: &options.flag_package,
exec_engine: None,
release: options.flag_release,
mode: ops::CompileMode::Test,
let profiles = Profiles::default();
let cx = try!(Context::new(&resolve, &srcs, &pkgs, opts.config,
Layout::at(target_dir),
- None, &pkg, BuildConfig::default(),
+ None, BuildConfig::default(),
&profiles));
// And finally, clean everything out!
use std::sync::Arc;
use core::registry::PackageRegistry;
-use core::{Source, SourceId, PackageSet, Package, Target};
+use core::{Source, SourceId, PackageSet, Package, Target, PackageId};
use core::{Profile, TargetKind};
use core::resolver::Method;
use ops::{self, BuildOutput, ExecEngine};
/// Flag if the default feature should be built for the root package
pub no_default_features: bool,
/// Root package to build (if None it's the current one)
- pub spec: Option<&'a str>,
+ pub spec: &'a [String],
/// Filter to apply to the root package to select which targets will be
/// built.
pub filter: CompileFilter<'a>,
compile_pkg(&package, options)
}
-pub fn compile_pkg<'a>(package: &Package,
+pub fn compile_pkg<'a>(root_package: &Package,
options: &CompileOptions<'a>)
-> CargoResult<ops::Compilation<'a>> {
- let CompileOptions { config, jobs, target, spec, features,
+ let CompileOptions { config, jobs, target, ref spec, features,
no_default_features, release, mode,
ref filter, ref exec_engine,
ref target_rustc_args } = *options;
s.split(' ')
}).map(|s| s.to_string()).collect::<Vec<String>>();
- if spec.is_some() && (no_default_features || features.len() > 0) {
+ if spec.len() > 0 && (no_default_features || features.len() > 0) {
return Err(human("features cannot be modified when the main package \
is not being built"))
}
return Err(human("jobs must be at least 1"))
}
- let override_ids = try!(source_ids_from_config(config, package.root()));
-
let (packages, resolve_with_overrides, sources) = {
- let mut registry = PackageRegistry::new(config);
+ let override_ids =
+ try!(source_ids_from_config(options.config, root_package.root()));
+ let mut registry = PackageRegistry::new(options.config);
+ if let Some(source) = source {
+ registry.preload(root_package.package_id().source_id(), source);
+ } else {
+ try!(registry.add_sources(&[root_package.package_id().source_id()
+ .clone()]));
+ }
- // First, resolve the package's *listed* dependencies, as well as
+ // First, resolve the root_package's *listed* dependencies, as well as
// downloading and updating all remotes and such.
- let resolve = try!(ops::resolve_pkg(&mut registry, package));
+ let resolve = try!(ops::resolve_pkg(&mut registry, root_package));
// Second, resolve with precisely what we're doing. Filter out
// transitive dependencies if necessary, specify features, handle
try!(registry.add_overrides(override_ids));
- let method = Method::Required {
+ let method = Method::Required{
dev_deps: true, // TODO: remove this option?
features: &features,
- uses_default_features: !no_default_features,
+ uses_default_features: !options.no_default_features,
};
let resolved_with_overrides =
- try!(ops::resolve_with_previous(&mut registry, package, method,
+ try!(ops::resolve_with_previous(&mut registry, root_package, method,
Some(&resolve), None));
- let packages = try!(ops::get_resolved_packages(&resolved_with_overrides, &mut registry));
+ let req: Vec<PackageId> = resolved_with_overrides.iter().map(|r| {
+ r.clone()
+ }).collect();
+ let packages = try!(registry.get(&req).chain_error(|| {
+ human("Unable to get packages from source")
+ }));
(packages, resolved_with_overrides, registry.move_sources())
};
- let pkgid = match spec {
- Some(spec) => try!(resolve_with_overrides.query(spec)),
- None => package.package_id(),
+ let mut invalid_spec = vec![];
+ let pkgids = if spec.len() > 0 {
+ spec.iter().filter_map(|p| {
+ match resolve_with_overrides.query(&p) {
+ Ok(p) => Some(p),
+ Err(..) => { invalid_spec.push(p.to_string()); None }
+ }
+ }).collect::<Vec<_>>()
+ } else {
+ vec![root_package.package_id()]
};
- let to_build = packages.iter().find(|p| p.package_id() == pkgid).unwrap();
- let targets = try!(generate_targets(to_build, mode, filter, release));
-
- let target_with_args = match *target_rustc_args {
- Some(args) if targets.len() == 1 => {
- let (target, profile) = targets[0];
- let mut profile = profile.clone();
- profile.rustc_args = Some(args.to_vec());
- Some((target, profile))
- }
- Some(_) => {
- return Err(human("extra arguments to `rustc` can only be passed to \
- one target, consider filtering\nthe package by \
- passing e.g. `--lib` or `--bin NAME` to specify \
- a single target"))
+
+ /*
+ if spec.len() > 0 && invalid_spec.len() > 0 {
+ return Err(human(format!("could not find package matching spec `{}`",
+ invalid_spec.join(", "))));
+ } */
+
+ let to_builds = packages.iter().filter(|p|
+ pkgids.iter().find(|&op| *op == p.package_id()).is_some()
+ ).collect::<Vec<&Package>>();
+
+ let mut twas = &mut vec![];
+ let mut package_targets = vec![];
+
+ for &to_build in to_builds.iter() {
+ let targets = try!(generate_targets(to_build, mode, filter, release));
+
+ match *target_rustc_args {
+ Some(args) if targets.len() == 1 => {
+ let (target, profile) = targets[0];
+ let mut profile = profile.clone();
+ profile.rustc_args = Some(args.to_vec());
+ twas.push((target, profile));
+ }
+ Some(_) => {
+ return Err(human("extra arguments to `rustc` can only be \
+ passed to one target, consider \
+ filtering\nthe package by passing e.g. \
+ `--lib` or `--bin NAME` to specify \
+ a single target"))
+ }
+ None => package_targets.push((to_build, targets)),
+ };
+
+ }
+
+ for targets in twas {
+ let (target, ref profile) = *targets;
+ for &to_build in to_builds.iter() {
+ package_targets.push((to_build, vec![(target, profile)]));
}
- None => None,
- };
+ }
- let targets = target_with_args.as_ref().map(|&(t, ref p)| vec![(t, p)])
- .unwrap_or(targets);
- let ret = {
+ let mut ret = {
let _p = profile::start("compiling");
let mut build_config = try!(scrape_build_config(config, jobs, target));
build_config.exec_engine = exec_engine.clone();
build_config.doc_all = deps;
}
- try!(ops::compile_targets(&targets, to_build,
+ try!(ops::compile_targets(&package_targets,
&PackageSet::new(&packages),
&resolve_with_overrides,
&sources,
config,
build_config,
- to_build.manifest().profiles()))
+ root_package.manifest().profiles(),
+ ))
};
+ ret.to_doc_test = to_builds.iter().map(|&p| p.clone()).collect();
+
return Ok(ret);
}
let mut lib_names = HashSet::new();
let mut bin_names = HashSet::new();
- if options.compile_opts.spec.is_none() {
+ if options.compile_opts.spec.len() == 0 {
for target in package.targets().iter().filter(|t| t.documented()) {
if target.is_lib() {
assert!(lib_names.insert(target.crate_name()));
try!(ops::compile(manifest_path, &options.compile_opts));
if options.open_result {
- let name = match options.compile_opts.spec {
- Some(spec) => try!(PackageIdSpec::parse(spec)).name().replace("-", "_").to_string(),
- None => {
- match lib_names.iter().chain(bin_names.iter()).nth(0) {
- Some(s) => s.to_string(),
- None => return Ok(())
- }
+ let name = if options.compile_opts.spec.len() > 0{
+ // TODO
+ try!(PackageIdSpec::parse(options.compile_opts.spec.first()
+ .unwrap())).name().replace("-", "_")
+ .to_string()
+ } else {
+ match lib_names.iter().chain(bin_names.iter()).nth(0) {
+ Some(s) => s.to_string(),
+ None => return Ok(())
}
};
target: None,
features: &[],
no_default_features: false,
- spec: None,
+ spec: &[],
filter: ops::CompileFilter::Everything,
exec_engine: None,
release: false,
pub libraries: HashMap<PackageId, Vec<(Target, PathBuf)>>,
/// An array of all tests created during this compilation.
- pub tests: Vec<(String, PathBuf)>,
+ pub tests: Vec<(Package, Vec<(String, PathBuf)>)>,
/// An array of all binaries created.
pub binaries: Vec<PathBuf>,
/// be passed to future invocations of programs.
pub extra_env: HashMap<String, String>,
- /// Top-level package that was compiled
- pub package: Package,
+ pub to_doc_test: Vec<Package>,
/// Features enabled during this compilation.
pub features: HashSet<String>,
}
impl<'cfg> Compilation<'cfg> {
- pub fn new(pkg: &Package, config: &'cfg Config) -> Compilation<'cfg> {
+ pub fn new(config: &'cfg Config) -> Compilation<'cfg> {
Compilation {
libraries: HashMap::new(),
native_dirs: HashMap::new(), // TODO: deprecated, remove
tests: Vec::new(),
binaries: Vec::new(),
extra_env: HashMap::new(),
- package: pkg.clone(),
+ to_doc_test: Vec::new(),
features: HashSet::new(),
config: config,
}
config: &'cfg Config,
host: Layout,
target_layout: Option<Layout>,
- root_pkg: &Package,
build_config: BuildConfig,
profiles: &'a Profiles) -> CargoResult<Context<'a, 'cfg>> {
let target = build_config.requested_target.clone();
host_dylib: host_dylib,
host_exe: host_exe,
requirements: HashMap::new(),
- compilation: Compilation::new(root_pkg, config),
+ compilation: Compilation::new(config),
build_state: Arc::new(BuildState::new(&build_config, deps)),
build_config: build_config,
exec_engine: engine,
// Returns a mapping of the root package plus its immediate dependencies to
// where the compiled libraries are all located.
-pub fn compile_targets<'a, 'cfg: 'a>(targets: &[(&'a Target, &'a Profile)],
- pkg: &'a Package,
+pub fn compile_targets<'a, 'cfg: 'a>(pkg_targets: &'a [(&Package,
+ Vec<(&Target,
+ &'a Profile)>)],
deps: &'a PackageSet,
resolve: &'a Resolve,
sources: &'a SourceMap<'cfg>,
build_config: BuildConfig,
profiles: &'a Profiles)
-> CargoResult<Compilation<'cfg>> {
- if targets.is_empty() {
- return Ok(Compilation::new(pkg, config))
- }
- debug!("compile_targets: {}", pkg);
+ debug!("compile_targets: {}", pkg_targets.iter().map(|&(ref p, _)| p.name())
+ .collect::<Vec<_>>().join(", "));
try!(links::validate(deps));
let dest = if build_config.release {"release"} else {"debug"};
- let root = if resolve.root() == pkg.package_id() {
- pkg
- } else {
- deps.iter().find(|p| p.package_id() == resolve.root()).unwrap()
- };
+ let root = deps.iter().find(|p| p.package_id() == resolve.root()).unwrap();
let host_layout = Layout::new(config, root, None, &dest);
let target_layout = build_config.requested_target.as_ref().map(|target| {
layout::Layout::new(config, root, Some(&target), &dest)
});
let mut cx = try!(Context::new(resolve, sources, deps, config,
- host_layout, target_layout, pkg,
+ host_layout, target_layout,
build_config, profiles));
let mut queue = JobQueue::new(cx.resolve, deps, cx.jobs());
- // Prep the context's build requirements and see the job graph for all
- // packages initially.
{
let _p = profile::start("preparing build directories");
- try!(cx.prepare(pkg, targets));
- prepare_init(&mut cx, pkg, &mut queue, &mut HashSet::new());
- custom_build::build_map(&mut cx, pkg, targets);
+ // Prep the context's build requirements and see the job graph for all
+ // packages initially.
+ for &(pkg, ref targets) in pkg_targets {
+ try!(cx.prepare(pkg, targets));
+ prepare_init(&mut cx, pkg, &mut queue, &mut HashSet::new());
+ custom_build::build_map(&mut cx, pkg, targets);
+ }
}
- // Build up a list of pending jobs, each of which represent compiling a
- // particular package. No actual work is executed as part of this, that's
- // all done next as part of the `execute` function which will run
- // everything in order with proper parallelism.
- try!(compile(targets, pkg, &mut cx, &mut queue));
+ for &(pkg, ref targets) in pkg_targets {
+ // Build up a list of pending jobs, each of which represent
+ // compiling a particular package. No actual work is executed as
+ // part of this, that's all done next as part of the `execute`
+ // function which will run everything in order with proper
+ // parallelism.
+ try!(compile(targets, pkg, &mut cx, &mut queue));
+ }
// Now that we've figured out everything that we're going to do, do it!
try!(queue.execute(cx.config));
- let out_dir = cx.layout(pkg, Kind::Target).build_out(pkg)
- .display().to_string();
- cx.compilation.extra_env.insert("OUT_DIR".to_string(), out_dir);
+ for &(pkg, ref targets) in pkg_targets.iter() {
+ let out_dir = cx.layout(pkg, Kind::Target).build_out(pkg)
+ .display().to_string();
+ cx.compilation.extra_env.insert("OUT_DIR".to_string(), out_dir);
+
+ let mut tests = vec![];
+
+ for &(target, profile) in targets {
+ let kind = Kind::from(target);
+ for filename in try!(cx.target_filenames(pkg, target, profile,
+ kind)).iter() {
+ let dst = cx.out_dir(pkg, kind, target).join(filename);
+ if profile.test {
+ tests.push((target.name().to_string(), dst));
+ } else if target.is_bin() || target.is_example() {
+ cx.compilation.binaries.push(dst);
+ } else if target.is_lib() {
+ let pkgid = pkg.package_id().clone();
+ cx.compilation.libraries.entry(pkgid).or_insert(Vec::new())
+ .push((target.clone(), dst));
+ }
+ if !target.is_lib() { continue }
- for &(target, profile) in targets {
- let kind = Kind::from(target);
- for filename in try!(cx.target_filenames(pkg, target, profile,
- kind)).iter() {
- let dst = cx.out_dir(pkg, kind, target).join(filename);
- if profile.test {
- cx.compilation.tests.push((target.name().to_string(), dst));
- } else if target.is_bin() || target.is_example() {
- cx.compilation.binaries.push(dst);
- } else if target.is_lib() {
- let pkgid = pkg.package_id().clone();
- cx.compilation.libraries.entry(pkgid).or_insert(Vec::new())
- .push((target.clone(), dst));
- }
- if !target.is_lib() { continue }
+ // Include immediate lib deps as well
+ for dep in &cx.dep_targets(pkg, target, kind, profile) {
+ let (pkg, target, profile) = *dep;
+ let pkgid = pkg.package_id();
+ if !target.is_lib() { continue }
+ if profile.doc { continue }
+ if cx.compilation.libraries.contains_key(&pkgid) {
+ continue
+ }
- // Include immediate lib deps as well
- for dep in cx.dep_targets(pkg, target, kind, profile) {
- let (pkg, target, profile) = dep;
- let pkgid = pkg.package_id();
- if !target.is_lib() { continue }
- if profile.doc { continue }
- if cx.compilation.libraries.contains_key(&pkgid) { continue }
-
- let kind = kind.for_target(target);
- let v = try!(cx.target_filenames(pkg, target, profile, kind));
- let v = v.into_iter().map(|f| {
- (target.clone(), cx.out_dir(pkg, kind, target).join(f))
- }).collect::<Vec<_>>();
- cx.compilation.libraries.insert(pkgid.clone(), v);
+ let kind = kind.for_target(target);
+ let v =
+ try!(cx.target_filenames(pkg, target, profile, kind));
+ let v = v.into_iter().map(|f| {
+ (target.clone(), cx.out_dir(pkg, kind, target).join(f))
+ }).collect::<Vec<_>>();
+ cx.compilation.libraries.insert(pkgid.clone(), v);
+ }
}
}
- }
- if let Some(feats) = cx.resolve.features(pkg.package_id()) {
- cx.compilation.features.extend(feats.iter().cloned());
+ cx.compilation.tests.push((pkg.clone(), tests));
+
+ if let Some(feats) = cx.resolve.features(pkg.package_id()) {
+ cx.compilation.features.extend(feats.iter().cloned());
+ }
}
for (&(ref pkg, _), output) in cx.build_state.outputs.lock().unwrap().iter() {
});
// Figure out what stage this work will go into
- let dst = match (target.is_lib(),
+ let stage = match (target.is_lib(),
profile.test,
target.is_custom_build()) {
- (_, _, true) => jobs.queue(pkg, Stage::BuildCustomBuild),
- (true, true, _) => jobs.queue(pkg, Stage::LibraryTests),
- (false, true, _) => jobs.queue(pkg, Stage::BinaryTests),
- (true, false, _) => jobs.queue(pkg, Stage::Libraries),
- (false, false, _) if !target.is_bin() => {
- jobs.queue(pkg, Stage::BinaryTests)
- }
- (false, false, _) => jobs.queue(pkg, Stage::Binaries),
+ (_, _, true) => Stage::BuildCustomBuild,
+ (true, true, _) => Stage::LibraryTests,
+ (false, true, _) => Stage::BinaryTests,
+ (true, false, _) => Stage::Libraries,
+ (false, false, _) if !target.is_bin() => Stage::BinaryTests,
+ (false, false, _) => Stage::Binaries,
};
+ let dst = jobs.queue(pkg, stage);
dst.push((Job::new(dirty, fresh), freshness));
}
}
Ok(errors)
}
+
+/// Compile the selected test targets and, unless `options.no_run` is set,
+/// execute every produced test binary with `test_args`.
+///
+/// Returns `Ok(Err(e))` when a test binary exits unsuccessfully, so the
+/// caller can distinguish a failing test run from an internal cargo error.
+fn build_and_run<'a>(manifest_path: &Path,
+                     options: &TestOptions<'a>,
+                     test_args: &[String])
+                     -> CargoResult<Result<Compilation<'a>, ProcessError>> {
+    let config = options.compile_opts.config;
+    // Refresh the path source so the compilation below sees current files.
+    let mut source = try!(PathSource::for_path(&manifest_path.parent().unwrap(),
+                                               config));
+    try!(source.update());
+
+    let mut compile = try!(ops::compile(manifest_path, &options.compile_opts));
+    if options.no_run { return Ok(Ok(compile)) }
+
+    // Sort each package's test binaries by name for a deterministic run
+    // order. (Plain loop: `map(..).collect::<Vec<_>>()` for side effects
+    // built and discarded a Vec of units.)
+    for &mut (_, ref mut tests) in compile.tests.iter_mut() {
+        tests.sort_by(|&(ref n1, _), &(ref n2, _)| n1.cmp(n2));
+    }
+
+    let cwd = config.cwd();
+    for &(ref pkg, ref tests) in &compile.tests {
+        for &(_, ref exe) in tests {
+            // Prefer a cwd-relative path in status output when possible.
+            let to_display = match util::without_prefix(exe, &cwd) {
+                Some(path) => path,
+                None => &**exe,
+            };
+            let mut cmd = try!(compile.target_process(exe, pkg));
+            cmd.args(test_args);
+            try!(config.shell().concise(|shell| {
+                shell.status("Running", to_display.display().to_string())
+            }));
+            try!(config.shell().verbose(|shell| {
+                shell.status("Running", cmd.to_string())
+            }));
+            // A test binary failing is reported to the caller, not raised
+            // as a cargo-internal error.
+            match ExecEngine::exec(&mut ProcessEngine, cmd) {
+                Ok(()) => {}
+                Err(e) => return Ok(Err(e))
+            }
+        }
+    }
+
+    Ok(Ok(compile))
+}
assert_that(p.cargo("rustc").arg("-v").arg("--").arg("-Zno-trans"),
execs().with_status(0));
});
+
+// `cargo build -p d1 -p d2 -p foo` builds the root package and both path
+// dependencies' binaries in a single invocation.
+test!(build_multiple_packages {
+    let p = project("foo")
+        .file("Cargo.toml", r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies.d1]
+                path = "d1"
+            [dependencies.d2]
+                path = "d2"
+
+            [[bin]]
+                name = "foo"
+        "#)
+        .file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
+        .file("d1/Cargo.toml", r#"
+            [package]
+            name = "d1"
+            version = "0.0.1"
+            authors = []
+
+            [[bin]]
+                name = "d1"
+        "#)
+        .file("d1/src/lib.rs", "")
+        .file("d1/src/main.rs", "fn main() { println!(\"d1\"); }")
+        .file("d2/Cargo.toml", r#"
+            [package]
+            name = "d2"
+            version = "0.0.1"
+            authors = []
+
+            [[bin]]
+                name = "d2"
+                doctest = false
+        "#)
+        .file("d2/src/main.rs", "fn main() { println!(\"d2\"); }");
+    p.build();
+
+    // Request all three packages explicitly by spec; assert success like
+    // the sibling tests do.
+    assert_that(p.cargo_process("build")
+                 .arg("-p").arg("d1")
+                 .arg("-p").arg("d2")
+                 .arg("-p").arg("foo"),
+                execs().with_status(0));
+
+    assert_that(&p.bin("foo"), existing_file());
+    assert_that(process(&p.bin("foo")).unwrap(),
+                execs().with_stdout("i am foo\n"));
+
+    // Dependency binaries land under target/debug/deps.
+    let d1 = p.build_dir().join("debug").join("deps").join("d1");
+    assert_that(&d1, existing_file());
+    assert_that(process(&d1).unwrap(), execs().with_stdout("d1"));
+
+    let d2 = p.build_dir().join("debug").join("deps").join("d2");
+    assert_that(&d2, existing_file());
+    assert_that(process(&d2).unwrap(), execs().with_stdout("d2"));
+});
{running} `rustdoc src[..]lib.rs [..]`
", compiling = COMPILING, running = RUNNING)));
});
+
+// `cargo doc -p bar -p baz` documents both requested path dependencies in a
+// single invocation; both generated index pages must exist afterwards.
+test!(doc_multiple_deps {
+    let p = project("foo")
+        .file("Cargo.toml", r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies.bar]
+            path = "bar"
+
+            [dependencies.baz]
+            path = "baz"
+        "#)
+        .file("src/lib.rs", r#"
+            extern crate bar;
+            pub fn foo() {}
+        "#)
+        .file("bar/Cargo.toml", r#"
+            [package]
+            name = "bar"
+            version = "0.0.1"
+            authors = []
+        "#)
+        .file("bar/src/lib.rs", r#"
+            pub fn bar() {}
+        "#)
+        .file("baz/Cargo.toml", r#"
+            [package]
+            name = "baz"
+            version = "0.0.1"
+            authors = []
+        "#)
+        .file("baz/src/lib.rs", r#"
+            pub fn baz() {}
+        "#);
+
+    assert_that(p.cargo_process("doc").arg("-p").arg("bar").arg("-p").arg("baz"),
+                execs().with_status(0));
+
+    // Both specs were documented even though neither is the root package.
+    assert_that(&p.root().join("target/doc"), existing_dir());
+    assert_that(&p.root().join("target/doc/bar/index.html"), existing_file());
+    assert_that(&p.root().join("target/doc/baz/index.html"), existing_file());
+});
", compiling = COMPILING, running = RUNNING, doctest = DOCTEST)))
});
+
+// `cargo test -p d1 -p d2` runs the test binaries of both requested
+// dependencies; the root package `foo` itself is not under test.
+test!(test_multiple_packages {
+    let p = project("foo")
+        .file("Cargo.toml", r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies.d1]
+            path = "d1"
+            [dependencies.d2]
+            path = "d2"
+
+            [lib]
+            name = "foo"
+            doctest = false
+        "#)
+        .file("src/lib.rs", "")
+        .file("d1/Cargo.toml", r#"
+            [package]
+            name = "d1"
+            version = "0.0.1"
+            authors = []
+
+            [lib]
+            name = "d1"
+            doctest = false
+        "#)
+        .file("d1/src/lib.rs", "")
+        .file("d2/Cargo.toml", r#"
+            [package]
+            name = "d2"
+            version = "0.0.1"
+            authors = []
+
+            [lib]
+            name = "d2"
+            doctest = false
+        "#)
+        .file("d2/src/lib.rs", "");
+    p.build();
+
+    // Each dependency's (empty) unit-test binary is executed and reports ok.
+    assert_that(p.cargo("test").arg("-p").arg("d1").arg("-p").arg("d2"),
+                execs().with_status(0)
+                       .with_stdout_contains(&format!("\
+{running} target[..]debug[..]d1-[..]
+
+running 0 tests
+
+test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured
+", running = RUNNING))
+                       .with_stdout_contains(&format!("\
+{running} target[..]debug[..]d2-[..]
+
+running 0 tests
+
+test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured
+", running = RUNNING)));
+});